In [2]:
# JSAnimation import available at https://github.com/jakevdp/JSAnimation
# NOTE(review): %pylab dumps numpy/matplotlib names (np, plt, ...) into the
# notebook namespace; the following cells rely on that side effect, so it is
# kept even though %matplotlib inline + explicit imports is the modern idiom.
%pylab inline
from JSAnimation import IPython_display
from matplotlib import animation

# create a simple animation: an amplitude-modulated traveling sine wave
fig = plt.figure()
ax = plt.axes(xlim=(0, 10), ylim=(-2, 2))
line, = ax.plot([], [], lw=2)

# fixed sample grid for the curve
x = np.linspace(0, 10, 1000)

def init():
    # blank the line at the start of each animation loop
    line.set_data([], [])
    return line,

def animate(i):
    # frame i: cos(phase) scales the amplitude, the sine shifts right by phase
    line.set_data(x, np.cos(i * 0.02 * np.pi) * np.sin(x - i * 0.02 * np.pi))
    return line,

# bare expression so JSAnimation's rich display renders the animation inline
animation.FuncAnimation(fig, animate, init_func=init,
                        frames=100, interval=20, blit=True)
Out[2]:


Once Loop Reflect
In [3]:
# JSAnimation import available at https://github.com/jakevdp/JSAnimation
from JSAnimation import IPython_display
from matplotlib import animation

# Set up the figure and an empty line artist for a traveling-wave animation.
# (plt and np come from the %pylab magic run in an earlier cell.)
fig = plt.figure()
ax = plt.axes(xlim=(0, 10), ylim=(-2, 2))
wave_line, = ax.plot([], [], lw=2)

# fixed sample grid for the curve
sample_pts = np.linspace(0, 10, 1000)

def init():
    # start each animation loop from a blank line
    wave_line.set_data([], [])
    return (wave_line,)

def animate(frame):
    # amplitude-modulated sine whose phase advances with the frame index
    phase = frame * 0.02 * np.pi
    wave_line.set_data(sample_pts, np.cos(phase) * np.sin(sample_pts - phase))
    return (wave_line,)

# bare expression so JSAnimation's rich display renders the animation inline
animation.FuncAnimation(
    fig, animate, init_func=init, frames=100, interval=20, blit=True
)
Out[3]:


Once Loop Reflect
In [5]:
import numpy as np
from math import *
from pylab import *
from matplotlib import pyplot as plt
from matplotlib import animation

def generate(X, Y, phi):
    """Radially damped cosine surface: cos(2*pi*X + phi) scaled by 1 - r."""
    R = 1 - np.sqrt(X**2 + Y**2)
    return np.cos(2 * np.pi * X + phi) * R

# First set up the figure, the axes, and the plot elements we want to animate:
# a 2D line panel (left) and a 3D wireframe panel (middle).
fig = plt.figure()
ax1 = plt.subplot(131)
ax2 = plt.subplot(132, projection='3d')

# initial 2D curve
line, = ax1.plot([], [], lw=2, color='b')
x = np.linspace(0, 10, 1000)
line.set_data(x, np.cos(0) * np.sin(x))

# initial 3D wireframe
xs = np.linspace(-1, 1, 50)
ys = np.linspace(-1, 1, 50)
X, Y = np.meshgrid(xs, ys)
Z = generate(X, Y, 0.0)
scat = ax2.plot_wireframe(X, Y, Z, rstride=2, cstride=2)
ax2.set_zlim(-1, 1)


# animation function.  This is called sequentially
def animate(i):
    """Advance both panels to frame i and return the updated artists."""
    line.set_data(x, np.cos(i * 0.02 * np.pi) * np.sin(x - i * 0.02 * np.pi))

    # plot_wireframe cannot be updated in place, so clear and redraw
    ax2.cla()
    phi = i * 360 / 2 / np.pi / 100
    Z = generate(X, Y, phi)
    # BUG FIX: the original called ax.plot_wireframe, but no `ax` exists in
    # this cell (the name leaked from an earlier cell's state and would raise
    # NameError under Restart & Run All); draw on ax2 instead.
    scat = ax2.plot_wireframe(X, Y, Z, rstride=2, cstride=2)
    ax2.set_zlim(-1, 1)

    return [scat, line]

# call the animator.
# NOTE(review): blit=True is not supported on 3D axes; it appears to work
# here only because the JSAnimation writer renders full frames.
animation.FuncAnimation(fig, animate,
                        frames=100, interval=20, blit=True)
# plt.show()
Out[5]:


Once Loop Reflect
In [11]:
# grab the recorded gradient-descent trajectory ([b, w, cost] per step) for inspection
c = demo.cost_history
In [16]:
from mpl_toolkits.mplot3d import axes3d
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from JSAnimation import IPython_display

def generate(X, Y, phi):
    """Radially damped cosine surface: cos(2*pi*X + phi) scaled by 1 - r."""
    R = 1 - np.sqrt(X**2 + Y**2)
    return np.cos(2 * np.pi * X + phi) * R

# Three-panel figure: two 2D sine panels flanking a 3D surface panel.
fig = plt.figure(num=None, figsize=(12, 4), dpi=80, facecolor='w', edgecolor='k')
ax1 = plt.subplot(131)
ax2 = plt.subplot(132, projection='3d')
ax3 = plt.subplot(133)

# sample grid shared by the 2D panels
x = np.linspace(0, 2 * np.pi, 100)
y = np.sin(x)

# grid for the 3D surface
xs = np.linspace(-1, 1, 50)
ys = np.linspace(-1, 1, 50)
X, Y = np.meshgrid(xs, ys)
Z = generate(X, Y, 0.0)
blah = np.sin(x)  # sampled sine used for the moving marker in the 3D panel


def update(i):
    """Redraw all three panels for frame i."""
    # top panels: sine with frequency i
    ax1.cla()
    ax3.cla()
    y = np.sin(i * x)
    ax1.plot(x, y)
    ax3.plot(x, y)

    # middle panel: shift the surface phase and mark one sample point
    ax2.cla()
    phi = i * 360 / 2 / np.pi / 100
    Z = generate(X, Y, phi)
    wframe = ax2.plot_surface(X, Y, Z, rstride=2, cstride=2)
    ax2.scatter(x[i], blah[i], 1)
    ax2.set_zlim(-1, 1)
    return wframe,

# BUG FIX: blit=True is unsupported on 3D axes, and update() clears/redraws
# whole axes while returning only one artist, so the blit artist list is
# wrong anyway; use blit=False for correct rendering.
animation.FuncAnimation(fig, update, frames=100, interval=100, blit=False)
Out[16]:


Once Loop Reflect
In [12]:
c
Out[12]:
[[-2.5, -2.5, 150.10400046608794],
 [-1.3999999999999997, -2.427939324396303, 55.210148009864817],
 [-0.78399999999999959, -2.3573833920045275, 25.11996362976917],
 [-0.43903999999999954, -2.288300781357766, 15.365908373729084],
 [-0.24586239999999965, -2.2206607271200536, 12.002395892881092],
 [-0.13768294399999975, -2.1544331063852957, 10.655547571432164],
 [-0.077102448639999766, -2.0895884252622952, 9.953194768464515],
 [-0.043177371238399806, -2.0260978057399055, 9.4645265940057008],
 [0.14680306221056, -1.4044494766054711, 6.4366524075511231],
 [0.082209714837913644, -1.3552656641394598, 5.8718339064580514],
 [0.046037440309231674, -1.3071088890599929, 5.5402899006093094],
 [-0.1565272970513874, -0.83559705014816243, 4.0415690179979684],
 [-0.087655286348776923, -0.79829179446954779, 3.5338722353539436],
 [-0.049086960355315054, -0.76176553273986736, 3.2858222045500107],
 [-0.027488697798976398, -0.7260019983081778, 3.122868750249685],
 [0.093461572516519847, -0.37583465720101666, 2.2002669144111966],
 [0.052338480609251108, -0.34812998233972225, 1.992406918935578],
 [0.02930954914118062, -0.32100382580464076, 1.8782264749008479],
 [0.016413347519061143, -0.29444410719939723, 1.7954487863922444],
 [-0.055805381564807903, -0.034393019063331476, 1.2523772348379969],
 [-0.031251013676292426, -0.013818196690245858, 1.1634593623456242],
 [-0.017500567658723765, 0.0063269901841017731, 1.1085523929629626],
 [0.0595019300396607, 0.20357221876661502, 0.83126887659787774],
 [0.033321080822209977, 0.2191779428999456, 0.75370521247577937],
 [0.018659805260437576, 0.2344577943258665, 0.71383525798095793],
 [0.010449490945845033, 0.24941857779246418, 0.68642852282461719],
 [-0.035528269215873286, 0.39590235940484542, 0.51962435303439236],
 [-0.019895830760889053, 0.40749191978244192, 0.48727169649715779],
 [-0.011141665226097889, 0.41883947143390254, 0.46855188440525219],
 [0.037881661768732616, 0.5299454360151673, 0.38683466094598479],
 [0.021213730590490244, 0.53873596060875706, 0.35751805374619994],
 [0.011879689130674508, 0.54734292486100089, 0.34339171796885248],
 [0.0066526259131777041, 0.55577016180809091, 0.33423290376189879],
 [-0.022618928104804412, 0.63828278818955275, 0.28353990475028751],
 [-0.012666599738690493, 0.64481105401417826, 0.27159676666700533],
 [-0.0070932958536666984, 0.65120299911182022, 0.26513091455628685],
 [-0.0039722456780533766, 0.65746147007927846, 0.26049516927565458],
 [0.013505635305381208, 0.71873931000466773, 0.23053164403248844],
 [0.0075631557710134452, 0.72358751379765318, 0.22545125831576807],
 [0.0042353672317674996, 0.72833447927048145, 0.22235763140554612],
 [-0.014400248588009796, 0.77481289099241957, 0.20717810573553061],
 [-0.0080641392092855174, 0.7784901879120989, 0.20270856306582047],
 [-0.0045159179571999213, 0.78209069693696942, 0.20044372147848127],
 [-0.00252891405603199, 0.78561602152220977, 0.19890594748864687],
 [0.0085983077905084585, 0.82013312270277705, 0.18972137489569416],
 [0.0048150523626847037, 0.82286406003787638, 0.18786694410897312],
 [0.0026964293231034019, 0.82553797099370929, 0.18680932113743759],
 [-0.009167859698551899, 0.85171872478776534, 0.18235978487859086],
 [-0.0051340014311890966, 0.85379010377243758, 0.18066598655615343],
 [-0.0028750408014659281, 0.8558182290207218, 0.17986092551881541],
 [-0.0016100228488209517, 0.85780400374044496, 0.17934589142698085],
 [0.0054740776859909029, 0.87724708912596405, 0.17656246705204898],
 [0.0030654835041548749, 0.87878539466069217, 0.17587579593325181],
 [0.0017166707623267005, 0.88029157789304324, 0.17550940065580622],
 [0.00096133562690292119, 0.88176630958853586, 0.17524968656918538],
 [-0.0032685411314702863, 0.8962056787633994, 0.17359712104066963],
 [-0.0018303830336233932, 0.89734809834749585, 0.17330664414355271],
 [-0.0010250144988291346, 0.89846666236682082, 0.17313223969442254],
 [0.0034850492960187282, 0.90941872833930804, 0.17230208543872544],
 [0.001951627605770454, 0.91028523812251005, 0.17204437765275907],
 [0.0010929114592314209, 0.91113365378289946, 0.1719156315915171]]
In [24]:
from sympy import symbols, lambdify, sin, cos, tanh, exp, log, Max
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import _cntr as cntr
import os
from mpl_toolkits.mplot3d import Axes3D
import math
import time
from ipywidgets import interact
from ipywidgets import widgets
from IPython.display import clear_output
from ipywidgets import interact
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from JSAnimation import IPython_display
# -*- coding: utf-8 -*-

class regression_1dim_sliders:
    """Fit a 1-d regression model (line, sine feature, or logistic) with
    gradient descent plus backtracking line search, and animate both the fit
    to the data and the descent path over the cost surface.
    """

    def __init__(self):
        self.data = []          # (input, output) rows loaded from csv
        self.cost_history = []  # [b, w, cost] recorded at every descent step
        self.x_orig = []        # untransformed inputs, kept for plotting

    # load in a two-dimensional dataset from csv - input should be in first
    # column, output in second column, no headers
    def load_data(self, *args):
        """Load args[0] (csv path), center the data, optionally transform.

        args[1] == 'sin'       -> replace inputs with sin(2*pi*x)
        args[-1] == 'logistic' -> keep outputs (class labels) uncentered
        """
        # load data
        self.data = np.asarray(pd.read_csv(args[0], header=None))

        # center x-value of data
        self.data[:, 0] = self.data[:, 0] - np.mean(self.data[:, 0])

        # center y-value of data if not performing logistic regression
        if args[-1] != 'logistic':
            self.data[:, 1] = self.data[:, 1] - np.mean(self.data[:, 1])

        # transform input if needed
        self.x_orig = self.data[:, 0].copy()
        if len(args) > 1:
            if args[1] == 'sin':
                self.data[:, 0] = np.sin(2 * np.pi * self.data[:, 0])

    #### linear regression functions ####
    def compute_lin_regression_cost(self, x, y, b, w):
        """Sum of squared errors of the line b + w*x over the dataset."""
        cost = 0
        for p in range(0, len(y)):
            cost += (b + w * x[p] - y[p]) ** 2
        return cost

    # gradient descent function
    def run_lin_regression_grad_descent(self, inits, max_its):
        """Run max_its gradient-descent steps on the least-squares cost
        starting from inits = [intercept, slope]; fills self.cost_history."""
        # peel off coordinates
        x = self.data[:, 0]
        y = self.data[:, 1]

        # initialize parameters
        b = inits[0]    # initial intercept
        w = inits[1]    # initial slope
        P = len(y)

        # record the starting point on the cost surface
        cost_val = self.compute_lin_regression_cost(x, y, b, w)
        self.cost_history = []
        self.cost_history.append([b, w, cost_val])

        # gradient descent loop
        for k in range(1, max_its + 1):
            # partial derivatives: gprime_b w.r.t. b, gprime_w w.r.t. w
            gprime_b = 0
            gprime_w = 0
            for p in range(0, P):
                temp = 2 * (b + w * x[p] - y[p])
                gprime_b += temp
                gprime_w += temp * x[p]

            # set alpha via line search
            grad = np.asarray([gprime_b, gprime_w])
            grad.shape = (len(grad), 1)
            alpha = self.line_search(x, y, b, w, grad, self.compute_lin_regression_cost)

            # take descent step in each partial derivative
            b = b - alpha * gprime_b
            w = w - alpha * gprime_w

            # compute cost function value
            cost_val = self.compute_lin_regression_cost(x, y, b, w)
            self.cost_history.append([b, w, cost_val])

    #### logistic regression functions ####
    def compute_logistic_regression_cost(self, x, y, b, w):
        """Softmax/log-loss cost of b + w*x against labels y in {-1, +1}."""
        cost = 0
        for p in range(0, len(y)):
            cost += np.log(1 + np.exp(-y[p] * (b + x[p] * w)))
        return cost

    # gradient descent function for softmax cost/logistic regression
    def run_logistic_regression_grad_descent(self, inits, max_its):
        """Same loop as the linear version, but on the softmax cost."""
        # peel off coordinates
        x = self.data[:, 0]
        y = self.data[:, 1]

        # initialize parameters
        b = inits[0]
        w = inits[1]
        P = len(y)

        # record the starting point on the cost surface
        cost_val = self.compute_logistic_regression_cost(x, y, b, w)
        self.cost_history = []
        self.cost_history.append([b, w, cost_val])

        for k in range(max_its):
            # compute gradient
            gprime_b = 0
            gprime_w = 0
            for p in range(P):
                temp = -1 / (1 + np.exp(y[p] * (b + w * x[p]))) * y[p]
                gprime_b += temp
                gprime_w += temp * x[p]
            grad = np.asarray([gprime_b, gprime_w])
            grad.shape = (len(grad), 1)

            # compute step length via line search
            alpha = self.line_search(x, y, b, w, grad, self.compute_logistic_regression_cost)

            # take descent step in each partial derivative
            b = b - alpha * gprime_b
            w = w - alpha * gprime_w

            # compute cost function value
            cost_val = self.compute_logistic_regression_cost(x, y, b, w)
            self.cost_history.append([b, w, cost_val])

    #### line search module - used with both linear and logistic regression
    #### gradient descent functions
    def line_search(self, x, y, b, w, grad, cost_fun):
        """Backtracking (Armijo-style) line search: shrink alpha by factor t
        until the step achieves sufficient decrease of cost_fun.

        NOTE(review): loops forever if cost_fun ever returns NaN; inputs are
        assumed finite here.
        """
        alpha = 1
        t = 0.1
        g_w = cost_fun(x, y, b, w)
        norm_w = np.linalg.norm(grad) ** 2
        while cost_fun(x, y, b - alpha * grad[0], w - alpha * grad[1]) > g_w - alpha * 0.5 * norm_w:
            alpha = t * alpha
        return alpha

    ##### plotting functions ####

    # show the fit and the descent path using an animation
    def fitting_slider(self, **args):
        """Build a FuncAnimation showing, per descent step, the current fit
        (left panel) and its position on the cost surface (right panel).

        args: fit_type ('line fit' | 'sine fit' | 'logistic fit'),
              xlabel, ylabel, view = [elevation, azimuth]
        """
        # pull out coordinates
        x_orig = self.x_orig
        x_tran = self.data[:, 0]
        y = self.data[:, 1]

        ##### precomputations #####
        # precompute fit input
        x_fit = np.linspace(np.min(x_orig) - 1, np.max(x_orig) + 1, 100)

        # precompute surface grid sized to cover the whole descent path
        xs = max([abs(v[0]) for v in self.cost_history])
        ys = max([abs(v[1]) for v in self.cost_history])
        minval = min(-xs, -ys)
        maxval = max(xs, ys)
        gap = (maxval - minval) * 0.2
        r = np.linspace(minval - gap, maxval + gap)
        s, t = np.meshgrid(r, r)
        s = np.reshape(s, (np.size(s), 1))
        t = np.reshape(t, (np.size(t), 1))

        # generate surface based on given data - done lazily, recomputed each call
        g = 0
        P = len(y)
        if args['fit_type'] == 'line fit' or args['fit_type'] == 'sine fit':
            for p in range(0, P):
                g += (s + t * x_tran[p] - y[p]) ** 2
        if args['fit_type'] == 'logistic fit':
            for p in range(0, P):
                g += np.log(1 + np.exp(-y[p] * (s + t * x_tran[p])))

        # reshape for surface plotting
        s.shape = (np.size(r), np.size(r))
        t.shape = (np.size(r), np.size(r))
        g.shape = (np.size(r), np.size(r))

        ##### setup figure and panels #####
        fig = plt.figure(num=None, figsize=(12, 4), dpi=80, facecolor='w', edgecolor='k')
        ax1 = plt.subplot(121)
        ax2 = plt.subplot(122, projection='3d')

        # animator: draw descent step `step`
        def show_fit(step):
            ### initialize plot data points and fit
            vals = self.cost_history[step]
            b = vals[0]
            w = vals[1]
            # BUG FIX: original assigned `yfit = 0` but plotted `y_fit`, so an
            # unrecognized fit_type raised NameError; default y_fit instead.
            y_fit = 0
            if args['fit_type'] == 'line fit':
                y_fit = b + x_fit * w
            if args['fit_type'] == 'sine fit':
                y_fit = b + np.sin(2 * np.pi * x_fit) * w
            if args['fit_type'] == 'logistic fit':
                y_fit = np.tanh(b + x_fit * w)

            # plot fit to data
            ax1.cla()
            ax1.plot(x_fit, y_fit, '-r', linewidth=3)

            # plot the (untransformed) data points
            ax1.scatter(x_orig, y)

            # clean up panel
            xgap = float(max(x_orig) - min(x_orig)) / float(10)
            ax1.set_xlim([min(x_orig) - xgap, max(x_orig) + xgap])
            ygap = float(max(y) - min(y)) / float(10)
            ax1.set_ylim([min(y) - ygap, max(y) + ygap])
            ax1.set_xticks([])
            ax1.set_yticks([])

            ### plot surface, plus the zero plane for reference
            ax2.cla()
            artist = ax2.plot_surface(s, t, g, alpha=0.15)
            ax2.plot_surface(s, t, g * 0, alpha=0.1)

            # plot all gradient descent steps faintly for context
            bs = [v[0] for v in self.cost_history]
            ws = [v[1] for v in self.cost_history]
            costs = [v[2] for v in self.cost_history]
            ax2.scatter(bs, ws, costs, color='m', marker='x', linewidth=3, alpha=0.1)

            # plot current gradient descent step in bright red
            cost = vals[2]
            ax2.scatter(b, w, cost, marker='o', color='r', s=50, edgecolor='k', linewidth=1)

            # clean up panel
            ax2.view_init(args['view'][0], args['view'][1])
            ax2.set_xticks([])
            ax2.set_yticks([])
            ax2.set_zticks([])

            ax2.set_xlabel(args['xlabel'], fontsize=14, labelpad=-5)
            ax2.set_ylabel(args['ylabel'], fontsize=14, labelpad=-5)

            ax2.zaxis.set_rotate_label(False)  # disable automatic rotation
            ax2.set_zlabel('cost  ', fontsize=14, rotation=0, labelpad=1)

            return artist,

        # frames clamped so a short descent run cannot index past cost_history
        return animation.FuncAnimation(fig, show_fit,
                                       frames=min(60, len(self.cost_history)),
                                       interval=60, blit=True)

# alternative interactive version (requires ipywidgets):
# interact(show_fit, step=widgets.IntSlider(min=0, max=len(self.cost_history)-1, step=1, value=0))
In [25]:
# import statements
%matplotlib inline
import sys
# sys.path.append('../../demo_python_backend_files')
from IPython import display            

# call the demonstration
# NOTE(review): assumes toy_regression_data.csv sits in the notebook's
# working directory - confirm before a fresh run
csvname = 'toy_regression_data.csv'
demo = regression_1dim_sliders()
demo.load_data(csvname)

# run gradient descent
# 60 steps starting from (intercept, slope) = (-2.5, -2.5)
demo.run_lin_regression_grad_descent(inits = [-2.5,-2.5],max_its = 60)
In [26]:
# run slider
# animate the linear fit and the descent path; view = [elevation, azimuth]
demo.fitting_slider(xlabel = 'intercept', ylabel= 'slope', view = [30,70], fit_type = 'line fit')
Out[26]:


Once Loop Reflect
In [ ]: